| Metric | Value |
| --- | --- |
| Conditions | 7 |
| Total Lines | 75 |
| Code Lines | 47 |
| Lines | 0 |
| Ratio | 0 % |
| Changes | 0 |

Small methods make your code easier to understand, especially when combined with a good name. And if a method is small, finding a good name for it is usually much easier.
For example, if you find yourself adding comments inside a method's body, that is usually a sign that the commented part should be extracted into a new method, with the comment serving as a starting point for that method's name.
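As a minimal sketch (with hypothetical names, not taken from the project discussed below), the commented validation block becomes a small method whose name comes straight from the comment:

```typescript
interface UploadedFile {
  name: string;
  size: number;
}

// Before: a comment explains what the next few lines do.
function handleUpload(file: UploadedFile): void {
  // validate the file before processing
  if (file.size === 0 || !file.name.endsWith(".zip")) {
    throw new Error(`invalid upload: ${file.name}`);
  }
  storeFile(file);
}

// After: the commented block is extracted into a method named after the comment.
function handleUploadRefactored(file: UploadedFile): void {
  validateUploadedFile(file);
  storeFile(file);
}

function validateUploadedFile(file: UploadedFile): void {
  if (file.size === 0 || !file.name.endsWith(".zip")) {
    throw new Error(`invalid upload: ${file.name}`);
  }
}

// Stub for the downstream step; in real code this would do the actual work.
function storeFile(file: UploadedFile): void {
  console.log(`storing ${file.name} (${file.size} bytes)`);
}
```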
Commonly applied refactorings include:
If many parameters or temporary variables are present, Replace Temp with Query, Introduce Parameter Object, or Preserve Whole Object are typical candidates (see the sketch below).
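For instance, a group of related parameters can be bundled into a single object, the classic Introduce Parameter Object refactoring. The sketch below uses hypothetical names (`createArchive`, `ArchiveOptions`) and only illustrates the shape of the change:

```typescript
// Before: the parameter list keeps growing as options are added.
function createArchiveBefore(
  sourceDir: string,
  targetPath: string,
  compressionLevel: number,
  includeHidden: boolean
): void {
  console.log(sourceDir, targetPath, compressionLevel, includeHidden);
}

// After: the related parameters travel together as one options object.
interface ArchiveOptions {
  sourceDir: string;
  targetPath: string;
  compressionLevel: number;
  includeHidden: boolean;
}

function createArchive(options: ArchiveOptions): void {
  const { sourceDir, targetPath, compressionLevel, includeHidden } = options;
  console.log(sourceDir, targetPath, compressionLevel, includeHidden);
}

createArchive({
  sourceDir: "src",
  targetPath: "tmp/backup.zip",
  compressionLevel: 9,
  includeHidden: false,
});
```

The long `backup()` method below is the kind of code these refactorings target: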
```typescript
import archiver from "archiver";

// ...

export function backup() {
  const fn = getFuncName();

  // create a file to stream archive data to.
  var output = fs.createWriteStream(root + "/tmp/backup.zip");
  var archive = archiver("zip", {
    zlib: { level: 9 }, // Sets the compression level.
  });

  // listen for all archive data to be written
  // 'close' event is fired only when a file descriptor is involved
  output.on("close", function () {
    console.log(archive.pointer() + " total bytes");
    console.log(
      "archiver has been finalized and the output file descriptor has closed."
    );
  });

  // This event is fired when the data source is drained no matter what was the data source.
  // It is not part of this library but rather from the NodeJS Stream API.
  // @see: https://nodejs.org/api/stream.html#stream_event_end
  output.on("end", function () {
    console.log("Data has been drained");
  });

  // good practice to catch warnings (ie stat failures and other non-blocking errors)
  archive.on("warning", function (err) {
    if (err.code === "ENOENT") {
      // log warning
    } else {
      // throw error
      throw err;
    }
  });

  // good practice to catch this error explicitly
  archive.on("error", function (err) {
    throw err;
  });

  // pipe archive data to the file
  archive.pipe(output);
  /**
   * Process archive
   * @param files
   */
  const archive_now = function (files: string[]) {
    if (files.length) {
      if (!fs.existsSync(files[0])) {
        if (!fs.existsSync(fromRoot(files[0]))) {
          log.log(log.error(`${fn}(${files[0]}) not found`));
          return;
        }
        files[0] = fromRoot(files[0]);
      }
      var type = fs.lstatSync(files[0]);
      var filename = path.basename(files[0]);
      if (type.isFile()) {
        archive.append(fs.createReadStream(files[0]), {
          name: filename,
        });
      } else if (type.isDirectory()) {
        archive.directory(fromRoot(files[0]), filename);
      }
      files.shift();
      archive_now(files);
    }
  };

  archive_now(
    readdir(fromRoot("/"), false, null, null, { folders: true, files: false })
  );
  archive_now(["src", "assets", ".vscode", "views", "libs"]);

  // finalize the archive (ie we are done appending files but streams have to finish yet)
  // ...
```
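To make the idea concrete for the code above, here is one possible direction, a sketch rather than the project's actual refactoring: the event wiring and the file-appending loop each become a small named function, so `backup()` reads as a short sequence of steps. The helper names are invented, and the project-specific helpers (`getFuncName`, `fromRoot`, `readdir`, `log`) are replaced with plain `fs`/`path` calls to keep the sketch self-contained:

```typescript
import fs from "fs";
import path from "path";
import archiver from "archiver";

type Archiver = ReturnType<typeof archiver>;

// Wire up logging and error handling for the output and archive streams.
function registerArchiveEvents(archive: Archiver, output: fs.WriteStream): void {
  output.on("close", () => {
    console.log(`${archive.pointer()} total bytes`);
  });
  archive.on("warning", (err) => {
    // stat failures and other non-blocking issues are only logged
    if (err.code === "ENOENT") {
      console.warn(err.message);
    } else {
      throw err;
    }
  });
  archive.on("error", (err) => {
    throw err;
  });
}

// Append each existing path as a file or directory entry; warn about missing ones.
function appendPaths(archive: Archiver, baseDir: string, names: string[]): void {
  for (const name of names) {
    const fullPath = path.join(baseDir, name);
    if (!fs.existsSync(fullPath)) {
      console.warn(`backup: ${fullPath} not found`);
      continue;
    }
    const stats = fs.lstatSync(fullPath);
    if (stats.isFile()) {
      archive.append(fs.createReadStream(fullPath), { name });
    } else if (stats.isDirectory()) {
      archive.directory(fullPath, name);
    }
  }
}

// The top-level method now reads as a short list of named steps.
export async function backup(rootDir: string): Promise<void> {
  const output = fs.createWriteStream(path.join(rootDir, "tmp/backup.zip"));
  const archive = archiver("zip", { zlib: { level: 9 } });

  registerArchiveEvents(archive, output);
  archive.pipe(output);
  appendPaths(archive, rootDir, ["src", "assets", ".vscode", "views", "libs"]);

  await archive.finalize();
}
```

With the recursion over `files[0]` and `shift()` replaced by a simple loop, and each concern given a descriptive name, the top-level method is short enough that it can be read without any comments at all.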